In [2]:
import glob
import math
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import numpy as np
import random
import sklearn.metrics as metrics

from tensorflow.keras import optimizers
from tensorflow.keras.callbacks import ModelCheckpoint, CSVLogger, LearningRateScheduler
from tensorflow.keras.models import Model
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.layers import add, concatenate, Conv2D, Dense, Dropout, Flatten, Input
from tensorflow.keras.layers import Activation, AveragePooling2D, BatchNormalization, MaxPooling2D
from tensorflow.keras.regularizers import l2
from tensorflow.keras.utils import to_categorical


%matplotlib inline
In [3]:
                            # Set up 'ggplot' style
plt.style.use('ggplot')     # if want to use the default style, set 'classic'
plt.rcParams['ytick.right']     = True
plt.rcParams['ytick.labelright']= True
plt.rcParams['ytick.left']      = False
plt.rcParams['ytick.labelleft'] = False
plt.rcParams['font.family']     = 'Arial'
In [4]:
# where am i?
%pwd
Out[4]:
'C:\\Users\\david\\Documents\\ImageNet'
In [8]:
%ls
 Volume in drive C is Acer
 Volume Serial Number is F2E5-64E8

 Directory of C:\Users\david\Documents\ImageNet

09/14/2019  04:33 PM    <DIR>          .
09/14/2019  04:33 PM    <DIR>          ..
09/09/2019  01:02 AM                43 .gitattributes
08/22/2019  11:06 PM                26 .gitignore
09/08/2019  11:00 PM    <DIR>          .ipynb_checkpoints
09/14/2019  04:17 PM         1,216,519 Create_Train_Test_Set.ipynb
09/14/2019  03:53 PM    <DIR>          data
08/22/2019  11:09 PM           455,126 Download-ImageNet.html
09/09/2019  12:35 AM           288,923 Download-ImageNet.ipynb
09/03/2019  09:40 PM           367,769 Download-Pexels.html
09/09/2019  12:35 AM            94,549 Download-Pexels.ipynb
09/09/2019  01:02 AM        10,518,772 fgs-imgs.npz
09/08/2019  11:18 PM        41,976,052 fgs-imgs128.npz
09/08/2019  11:18 PM        23,611,636 fgs-imgs96.npz
09/14/2019  03:57 PM        49,130,740 fgsOpnImg-imgs96.npz
09/12/2019  02:34 AM            24,691 FlowerPower.csv
09/12/2019  01:40 AM        42,602,920 FlowerPower.hdf5
09/14/2019  03:06 PM       226,409,716 flr102-imgs96.npz
09/09/2019  01:02 AM        15,728,884 flr-imgs.npz
09/08/2019  11:18 PM        62,374,132 flr-imgs128.npz
09/08/2019  11:18 PM        35,085,556 flr-imgs96.npz
09/09/2019  01:02 AM        13,295,860 flrnonflr-test-imgs.npz
09/08/2019  11:18 PM        52,445,428 flrnonflr-test-imgs128.npz
09/08/2019  11:18 PM        29,500,660 flrnonflr-test-imgs96-0.8.npz
09/14/2019  04:13 PM       102,187,252 flrnonflr-test-imgs96-0.8+.npz
09/08/2019  11:18 PM        14,764,276 flrnonflr-test-imgs96-0.9.npz
09/09/2019  01:02 AM             8,900 flrnonflr-test-labels.npz
09/08/2019  11:18 PM             8,780 flrnonflr-test-labels128.npz
09/08/2019  11:18 PM             8,780 flrnonflr-test-labels96-0.8.npz
09/08/2019  11:18 PM             4,516 flrnonflr-test-labels96-0.9.npz
09/14/2019  04:13 PM            29,812 flrnonflr-test-labels96-0+.8.npz
09/09/2019  01:02 AM        53,133,556 flrnonflr-train-imgs.npz
09/08/2019  11:18 PM       209,584,372 flrnonflr-train-imgs128.npz
09/08/2019  11:18 PM       117,891,316 flrnonflr-train-imgs96-0.8.npz
09/14/2019  04:13 PM       408,748,276 flrnonflr-train-imgs96-0.8+.npz
09/08/2019  11:18 PM       132,627,700 flrnonflr-train-imgs96-0.9.npz
09/09/2019  01:02 AM            34,836 flrnonflr-train-labels.npz
09/08/2019  11:18 PM            34,356 flrnonflr-train-labels128.npz
09/08/2019  11:18 PM            34,356 flrnonflr-train-labels96-0.8.npz
09/14/2019  04:13 PM           118,516 flrnonflr-train-labels96-0.8+.npz
09/08/2019  11:18 PM            38,620 flrnonflr-train-labels96-0.9.npz
08/17/2019  11:53 AM           124,162 ImageNet-Flowers.txt
08/17/2019  03:54 PM            75,692 ImageNet-Fungus.txt
08/17/2019  03:57 PM            81,424 ImageNet-Rocks.txt
09/03/2019  09:40 PM           128,688 Pexels-Flowers.txt
09/03/2019  09:40 PM            28,575 Pexels-Umbrellas.txt
09/09/2019  01:02 AM        22,733,044 pxl_flr-imgs.npz
09/08/2019  11:18 PM        88,080,628 pxl_flr-imgs128.npz
09/08/2019  11:18 PM        49,545,460 pxl_flr-imgs96.npz
09/09/2019  01:02 AM         5,173,492 pxl_umb-imgs.npz
09/08/2019  11:18 PM        20,594,932 pxl_umb-imgs128.npz
09/08/2019  11:18 PM        11,584,756 pxl_umb-imgs96.npz
09/09/2019  01:02 AM        12,275,956 rck-imgs.npz
09/08/2019  11:18 PM        49,004,788 rck-imgs128.npz
09/08/2019  11:18 PM        27,565,300 rck-imgs96.npz
09/14/2019  04:01 PM    <DIR>          readings
08/22/2019  11:02 PM                44 README.md
09/14/2019  04:21 PM           417,457 Reshape_Resize_Images.ipynb
09/09/2019  12:48 AM         8,546,104 train_Neural_Network (Conv2D, 96-0.8).html
09/11/2019  01:01 AM         4,494,650 train_Neural_Network (ResNetV1, 96-0.8, Dropout + added layer + RMSProp, try6).html
09/11/2019  10:59 PM         6,116,768 train_Neural_Network (ResNetV1, 96-0.8, Dropout + added layer + RMSProp, try7).html
09/12/2019  02:35 AM         5,851,809 train_Neural_Network (ResNetV1, 96-0.8, Dropout + added layer + RMSProp, try8).html
09/09/2019  03:08 AM         3,900,219 train_Neural_Network (ResNetV1, 96-0.8, Dropout + added layer, try3).html
09/09/2019  11:09 PM         6,528,529 train_Neural_Network (ResNetV1, 96-0.8, Dropout + added layer, try4).html
09/10/2019  08:44 PM         6,636,754 train_Neural_Network (ResNetV1, 96-0.8, Dropout + added layer, try5).html
09/09/2019  01:32 AM         6,583,279 train_Neural_Network (ResNetV1, 96-0.8, Dropout, try1).html
09/09/2019  02:40 AM         6,300,696 train_Neural_Network (ResNetV1, 96-0.8, Dropout, try2).html
09/09/2019  01:23 AM         6,446,135 train_Neural_Network (ResNetV1, 96-0.8, no Dropout, try1).html
09/14/2019  04:33 PM         5,541,066 train_Neural_Network.ipynb
09/14/2019  04:08 PM        88,003,828 umbOpnImg-imgs96.npz
              65 File(s)  2,086,750,457 bytes
               5 Dir(s)  84,120,698,880 bytes free
In [5]:
# Collect the on-disk image paths for each category.
# ImageNet downloads use .jpg; Pexels downloads use .jpeg.
flowers = glob.glob('./data/flr_*.jpg')
fungus = glob.glob('./data/fgs_*.jpg')
rocks = glob.glob('./data/rck_*.jpg')

pixel_flowers = glob.glob('./data/pxl_flower_*.jpeg')
pixel_umbrella = glob.glob('./data/pxl_umbrella_*.jpeg')

# Bug fix: the original format string dropped the label for the first count
# ("There are 1269, 1792 flower ..."), leaving the ImageNet flower count
# unlabeled. Every count is now explicitly named, in argument order.
summary = ("There are %d ImageNet flower, %d Pexels flower, %d fungus, "
           "%d rock and %d umbrella pictures"
           % (len(flowers), len(pixel_flowers), len(fungus),
              len(rocks), len(pixel_umbrella)))
print(summary)
There are 1269, 1792 flower, 856 fungus, 1007 rock and 420 umbrella pictures
In [6]:
# Preview 5 randomly chosen images from one category
# (the old comment said 10, but the loop has always shown 5).
from IPython.display import Image

dataset = flowers  # swap in fungus / rocks to inspect the other categories

for _ in range(5):
    index = random.randint(0, len(dataset) - 1)
    print("Showing:", dataset[index])

    img = mpimg.imread(dataset[index])
    imgplot = plt.imshow(img)
    plt.show()

#Image(dataset[index])
Showing: ./data\flr_01481.jpg
Showing: ./data\flr_01547.jpg
Showing: ./data\flr_01720.jpg
Showing: ./data\flr_00766.jpg
Showing: ./data\flr_00327.jpg

Extract the training and testing datasets

In [9]:
# Load the data
trDatOrg       = np.load('flrnonflr-train-imgs96-0.8+.npz')['arr_0']
trLblOrg       = np.load('flrnonflr-train-labels96-0.8+.npz')['arr_0']
tsDatOrg       = np.load('flrnonflr-test-imgs96-0.8+.npz')['arr_0']
tsLblOrg       = np.load('flrnonflr-test-labels96-0.8+.npz')['arr_0']
In [10]:
# Report the array shapes so the train/test split sizes are easy to eyeball.
shapes = (trDatOrg.shape, trLblOrg.shape, tsDatOrg.shape, tsLblOrg.shape)
print("For the training and test datasets:")
print("The shapes are %s, %s, %s, %s" % shapes)
For the training and test datasets:
The shapes are (14784, 96, 96, 3), (14784,), (3696, 96, 96, 3), (3696,)
In [11]:
# Preview 20 randomly chosen test images together with their labels
# (the old comment said 10; the loop shows 20).
# Labels appear to be 1.0 = flower, 0.0 = non-flower -- confirm against
# the notebook that built the label files.

data = tsDatOrg
label = tsLblOrg

for _ in range(20):
    index = random.randint(0, len(data) - 1)
    print("Showing %s index image, It is %s" % (index, label[index]))
    imgplot = plt.imshow(data[index])
    plt.show()
Showing 101 index image, It is 1.0
Showing 3382 index image, It is 0.0
Showing 3622 index image, It is 0.0
Showing 3056 index image, It is 0.0
Showing 786 index image, It is 1.0
Showing 1779 index image, It is 1.0
Showing 322 index image, It is 1.0
Showing 2997 index image, It is 0.0
Showing 772 index image, It is 1.0
Showing 3561 index image, It is 0.0
Showing 1801 index image, It is 1.0
Showing 43 index image, It is 1.0
Showing 264 index image, It is 1.0
Showing 1544 index image, It is 1.0
Showing 1321 index image, It is 1.0
Showing 1233 index image, It is 1.0
Showing 2758 index image, It is 0.0
Showing 169 index image, It is 1.0
Showing 787 index image, It is 1.0
Showing 70 index image, It is 1.0
In [12]:
# Convert pixel values to float32 and rescale from 0~255 into 0~1.
trDat = trDatOrg.astype('float32') / 255
tsDat = tsDatOrg.astype('float32') / 255

# Image geometry, read off the training tensor: [samples][rows][cols][channels].
imgrows = trDat.shape[1]
imgclms = trDat.shape[2]
channel = 3

# No reshape needed here: the .npz arrays are already stored in the
# [samples][width][height][channel] layout that Keras expects.

# One-hot encode the labels and derive the class count from the result.
trLbl = to_categorical(trLblOrg)
tsLbl = to_categorical(tsLblOrg)
num_classes = tsLbl.shape[1]
In [13]:
# fix random seed for reproducibility
# NOTE(review): this seeds only numpy; Python's `random` module (used by the
# preview cells) and TensorFlow keep their own seeds -- full reproducibility
# would require seeding those too.
seed = 29
np.random.seed(seed)


# Base name for the run's artifacts (checkpoint .hdf5 / training-log .csv);
# FlowerPower.hdf5 and FlowerPower.csv in the directory listing match it.
modelname = 'FlowerPower'

def createBaselineModel():
    """Build and compile a small baseline CNN classifier.

    Two conv/max-pool stages, dropout, then a dense head with a softmax
    over `num_classes` (module-level, derived from the one-hot labels).
    Input shape comes from the module-level imgrows/imgclms/channel.

    Returns:
        A compiled Keras `Model` (categorical cross-entropy, Adam).
    """
    img_in = Input(shape=(imgrows, imgclms, channel))

    net = Conv2D(30, (4, 4), activation='relu')(img_in)
    net = MaxPooling2D(pool_size=(2, 2))(net)
    net = Conv2D(50, (4, 4), activation='relu')(net)
    net = MaxPooling2D(pool_size=(2, 2))(net)
    net = Dropout(0.3)(net)

    net = Flatten()(net)
    net = Dense(32, activation='relu')(net)
    net = Dense(num_classes, activation='softmax')(net)

    model = Model(inputs=[img_in], outputs=net)
    model.compile(loss='categorical_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])
    return model

def resLyr(inputs,
           numFilters=16,
           kernelSz=3,
           strides=1,
           activation='relu',
           batchNorm=True,
           convFirst=True,
           lyrName=None):
    """One ResNet layer: Conv2D with optional BatchNorm and activation.

    convFirst=True applies conv -> BN -> activation (ResNet v1 ordering);
    convFirst=False applies BN -> activation -> conv (pre-activation).
    BatchNorm and activation are individually skippable. Sub-layer names
    are derived from `lyrName`, or left to Keras auto-naming when None.
    """
    def named(suffix):
        # Per-sublayer name, or None so Keras auto-names the layer.
        return lyrName + suffix if lyrName else None

    conv = Conv2D(numFilters,
                  kernel_size=kernelSz,
                  strides=strides,
                  padding='same',
                  kernel_initializer='he_normal',
                  kernel_regularizer=l2(1e-4),
                  name=named('_conv'))

    x = inputs
    if convFirst:
        x = conv(x)
    if batchNorm:
        x = BatchNormalization(name=named('_bn'))(x)
    if activation is not None:
        x = Activation(activation, name=named('_' + activation))(x)
    if not convFirst:
        x = conv(x)
    return x


def resBlkV1(inputs,
             numFilters=16,
             numBlocks=3,
             downsampleOnFirst=True,
             names=None):
    """Stack of ResNet-v1 residual blocks (conv-BN-relu, conv-BN, add, relu).

    When downsampleOnFirst is True, the first block halves the spatial size
    (stride 2) and projects the shortcut through a 1x1 conv so the tensors
    being added have matching shapes. Later blocks use identity shortcuts.
    """
    x = inputs
    for blk in range(numBlocks):
        tag = str(blk + 1)
        firstDown = downsampleOnFirst and blk == 0
        strides = 2 if firstDown else 1

        def named(suffix):
            return names + '_Blk' + tag + suffix if names else None

        # Main path: two stacked resLyr calls; the second has no activation
        # because relu is applied only after the residual add.
        y = resLyr(inputs=x, numFilters=numFilters, strides=strides,
                   lyrName=named('_Res1'))
        y = resLyr(inputs=y, numFilters=numFilters, activation=None,
                   lyrName=named('_Res2'))

        # Shortcut path: 1x1 projection only on the downsampling block.
        if firstDown:
            x = resLyr(inputs=x, numFilters=numFilters, kernelSz=1,
                       strides=strides, activation=None, batchNorm=False,
                       lyrName=named('_lin'))

        x = add([x, y], name=named('_add'))
        x = Activation('relu', name=named('_relu'))(x)
    return x

# Optimizer shared by createResNetV1 below. The Adam variant is kept for
# comparison; RMSprop was the one in use for the later training runs
# (the try6-try8 HTML exports in this directory are labeled RMSProp).
#optmz = optimizers.Adam(lr=0.001)
optmz = optimizers.RMSprop(lr=0.001)

def createResNetV1(inputShape=(imgrows, imgclms, channel),
                   numClasses=2):
    """Build and compile the ResNet-v1-style classifier.

    An input resLyr, then six residual stages of 3 blocks each with
    progressively more filters and a Dropout after every stage, followed
    by 6x6 average pooling and a softmax head. Compiled with the
    module-level `optmz` optimizer.

    Parameters:
        inputShape: image tensor shape (rows, cols, channels).
        numClasses: size of the softmax output layer.

    Returns:
        A compiled Keras `Model`.
    """
    # (stage name, filters, downsample on first block, dropout rate).
    # Expressed as data instead of six copy-pasted resBlkV1/Dropout call
    # pairs, so the architecture can be read -- and edited -- in one place.
    # Stage names, filter counts, flags and rates are identical to the
    # original, so the generated layer names are unchanged.
    stages = [
        ('Stg1',  16, False, 0.30),
        ('Stg2',  32, True,  0.40),
        ('Stg3',  64, True,  0.50),
        ('Stg4', 128, True,  0.50),
        ('Stg5', 128, False, 0.50),
        ('Stg6', 256, True,  0.50),
    ]

    inputs = Input(shape=inputShape)
    v = resLyr(inputs, lyrName='Inpt')
    for stageName, filters, downsample, dropRate in stages:
        v = resBlkV1(inputs=v, numFilters=filters, numBlocks=3,
                     downsampleOnFirst=downsample, names=stageName)
        v = Dropout(dropRate)(v)

    # 96 -> 48 -> 24 -> 12 -> 6 after the three downsampling stages,
    # so a pool_size of 6 reduces the feature map to 1x1.
    v = AveragePooling2D(pool_size=6, name='AvgPool')(v)
    v = Flatten()(v)
    outputs = Dense(numClasses, activation='softmax',
                    kernel_initializer='he_normal')(v)

    model = Model(inputs=inputs, outputs=outputs)
    model.compile(loss='categorical_crossentropy',
                  optimizer=optmz,
                  metrics=['accuracy'])
    return model



# Setup the models
# Two identical graphs are built: `model` is trained; `modelGo` presumably
# receives the best checkpointed weights for final evaluation -- confirm
# against the later (not shown here) load-weights cell.
model       = createResNetV1() # This is meant for training
modelGo     = createResNetV1() # This is used for final testing

model.summary()
WARNING:tensorflow:From D:\DocumentsDDrive\Installed_Files\Anaconda3\envs\tf-gpu\lib\site-packages\tensorflow\python\keras\initializers.py:104: calling VarianceScaling.__init__ (from tensorflow.python.ops.init_ops) with distribution=normal is deprecated and will be removed in a future version.
Instructions for updating:
`normal` is a deprecated alias for `truncated_normal`
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_1 (InputLayer)            (None, 96, 96, 3)    0                                            
__________________________________________________________________________________________________
Inpt_conv (Conv2D)              (None, 96, 96, 16)   448         input_1[0][0]                    
__________________________________________________________________________________________________
Inpt_bn (BatchNormalization)    (None, 96, 96, 16)   64          Inpt_conv[0][0]                  
__________________________________________________________________________________________________
Inpt_relu (Activation)          (None, 96, 96, 16)   0           Inpt_bn[0][0]                    
__________________________________________________________________________________________________
Stg1_Blk1_Res1_conv (Conv2D)    (None, 96, 96, 16)   2320        Inpt_relu[0][0]                  
__________________________________________________________________________________________________
Stg1_Blk1_Res1_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk1_Res1_relu (Activation (None, 96, 96, 16)   0           Stg1_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk1_Res2_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg1_Blk1_Res2_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk1_add (Add)             (None, 96, 96, 16)   0           Inpt_relu[0][0]                  
                                                                 Stg1_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk1_relu (Activation)     (None, 96, 96, 16)   0           Stg1_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg1_Blk2_Res1_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg1_Blk2_Res1_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk2_Res1_relu (Activation (None, 96, 96, 16)   0           Stg1_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk2_Res2_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg1_Blk2_Res2_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk2_add (Add)             (None, 96, 96, 16)   0           Stg1_Blk1_relu[0][0]             
                                                                 Stg1_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk2_relu (Activation)     (None, 96, 96, 16)   0           Stg1_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg1_Blk3_Res1_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg1_Blk3_Res1_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk3_Res1_relu (Activation (None, 96, 96, 16)   0           Stg1_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk3_Res2_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg1_Blk3_Res2_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk3_add (Add)             (None, 96, 96, 16)   0           Stg1_Blk2_relu[0][0]             
                                                                 Stg1_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk3_relu (Activation)     (None, 96, 96, 16)   0           Stg1_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout (Dropout)               (None, 96, 96, 16)   0           Stg1_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg2_Blk1_Res1_conv (Conv2D)    (None, 48, 48, 32)   4640        dropout[0][0]                    
__________________________________________________________________________________________________
Stg2_Blk1_Res1_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk1_Res1_relu (Activation (None, 48, 48, 32)   0           Stg2_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk1_Res2_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg2_Blk1_lin_conv (Conv2D)     (None, 48, 48, 32)   544         dropout[0][0]                    
__________________________________________________________________________________________________
Stg2_Blk1_Res2_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk1_add (Add)             (None, 48, 48, 32)   0           Stg2_Blk1_lin_conv[0][0]         
                                                                 Stg2_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk1_relu (Activation)     (None, 48, 48, 32)   0           Stg2_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg2_Blk2_Res1_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg2_Blk2_Res1_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk2_Res1_relu (Activation (None, 48, 48, 32)   0           Stg2_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk2_Res2_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg2_Blk2_Res2_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk2_add (Add)             (None, 48, 48, 32)   0           Stg2_Blk1_relu[0][0]             
                                                                 Stg2_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk2_relu (Activation)     (None, 48, 48, 32)   0           Stg2_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg2_Blk3_Res1_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg2_Blk3_Res1_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk3_Res1_relu (Activation (None, 48, 48, 32)   0           Stg2_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk3_Res2_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg2_Blk3_Res2_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk3_add (Add)             (None, 48, 48, 32)   0           Stg2_Blk2_relu[0][0]             
                                                                 Stg2_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk3_relu (Activation)     (None, 48, 48, 32)   0           Stg2_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout_1 (Dropout)             (None, 48, 48, 32)   0           Stg2_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg3_Blk1_Res1_conv (Conv2D)    (None, 24, 24, 64)   18496       dropout_1[0][0]                  
__________________________________________________________________________________________________
Stg3_Blk1_Res1_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk1_Res1_relu (Activation (None, 24, 24, 64)   0           Stg3_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk1_Res2_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg3_Blk1_lin_conv (Conv2D)     (None, 24, 24, 64)   2112        dropout_1[0][0]                  
__________________________________________________________________________________________________
Stg3_Blk1_Res2_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk1_add (Add)             (None, 24, 24, 64)   0           Stg3_Blk1_lin_conv[0][0]         
                                                                 Stg3_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk1_relu (Activation)     (None, 24, 24, 64)   0           Stg3_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg3_Blk2_Res1_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg3_Blk2_Res1_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk2_Res1_relu (Activation (None, 24, 24, 64)   0           Stg3_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk2_Res2_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg3_Blk2_Res2_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk2_add (Add)             (None, 24, 24, 64)   0           Stg3_Blk1_relu[0][0]             
                                                                 Stg3_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk2_relu (Activation)     (None, 24, 24, 64)   0           Stg3_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg3_Blk3_Res1_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg3_Blk3_Res1_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk3_Res1_relu (Activation (None, 24, 24, 64)   0           Stg3_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk3_Res2_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg3_Blk3_Res2_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk3_add (Add)             (None, 24, 24, 64)   0           Stg3_Blk2_relu[0][0]             
                                                                 Stg3_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk3_relu (Activation)     (None, 24, 24, 64)   0           Stg3_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout_2 (Dropout)             (None, 24, 24, 64)   0           Stg3_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg4_Blk1_Res1_conv (Conv2D)    (None, 12, 12, 128)  73856       dropout_2[0][0]                  
__________________________________________________________________________________________________
Stg4_Blk1_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk1_Res1_relu (Activation (None, 12, 12, 128)  0           Stg4_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk1_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg4_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg4_Blk1_lin_conv (Conv2D)     (None, 12, 12, 128)  8320        dropout_2[0][0]                  
__________________________________________________________________________________________________
Stg4_Blk1_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk1_add (Add)             (None, 12, 12, 128)  0           Stg4_Blk1_lin_conv[0][0]         
                                                                 Stg4_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk1_relu (Activation)     (None, 12, 12, 128)  0           Stg4_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg4_Blk2_Res1_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg4_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg4_Blk2_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk2_Res1_relu (Activation (None, 12, 12, 128)  0           Stg4_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk2_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg4_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg4_Blk2_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk2_add (Add)             (None, 12, 12, 128)  0           Stg4_Blk1_relu[0][0]             
                                                                 Stg4_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk2_relu (Activation)     (None, 12, 12, 128)  0           Stg4_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg4_Blk3_Res1_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg4_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg4_Blk3_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk3_Res1_relu (Activation (None, 12, 12, 128)  0           Stg4_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk3_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg4_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg4_Blk3_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk3_add (Add)             (None, 12, 12, 128)  0           Stg4_Blk2_relu[0][0]             
                                                                 Stg4_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk3_relu (Activation)     (None, 12, 12, 128)  0           Stg4_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout_3 (Dropout)             (None, 12, 12, 128)  0           Stg4_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg5_Blk1_Res1_conv (Conv2D)    (None, 12, 12, 128)  147584      dropout_3[0][0]                  
__________________________________________________________________________________________________
Stg5_Blk1_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk1_Res1_relu (Activation (None, 12, 12, 128)  0           Stg5_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk1_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg5_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg5_Blk1_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk1_add (Add)             (None, 12, 12, 128)  0           dropout_3[0][0]                  
                                                                 Stg5_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk1_relu (Activation)     (None, 12, 12, 128)  0           Stg5_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg5_Blk2_Res1_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg5_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg5_Blk2_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk2_Res1_relu (Activation (None, 12, 12, 128)  0           Stg5_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk2_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg5_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg5_Blk2_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk2_add (Add)             (None, 12, 12, 128)  0           Stg5_Blk1_relu[0][0]             
                                                                 Stg5_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk2_relu (Activation)     (None, 12, 12, 128)  0           Stg5_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg5_Blk3_Res1_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg5_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg5_Blk3_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk3_Res1_relu (Activation (None, 12, 12, 128)  0           Stg5_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk3_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg5_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg5_Blk3_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk3_add (Add)             (None, 12, 12, 128)  0           Stg5_Blk2_relu[0][0]             
                                                                 Stg5_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk3_relu (Activation)     (None, 12, 12, 128)  0           Stg5_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout_4 (Dropout)             (None, 12, 12, 128)  0           Stg5_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg6_Blk1_Res1_conv (Conv2D)    (None, 6, 6, 256)    295168      dropout_4[0][0]                  
__________________________________________________________________________________________________
Stg6_Blk1_Res1_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk1_Res1_relu (Activation (None, 6, 6, 256)    0           Stg6_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk1_Res2_conv (Conv2D)    (None, 6, 6, 256)    590080      Stg6_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg6_Blk1_lin_conv (Conv2D)     (None, 6, 6, 256)    33024       dropout_4[0][0]                  
__________________________________________________________________________________________________
Stg6_Blk1_Res2_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk1_add (Add)             (None, 6, 6, 256)    0           Stg6_Blk1_lin_conv[0][0]         
                                                                 Stg6_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk1_relu (Activation)     (None, 6, 6, 256)    0           Stg6_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg6_Blk2_Res1_conv (Conv2D)    (None, 6, 6, 256)    590080      Stg6_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg6_Blk2_Res1_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk2_Res1_relu (Activation (None, 6, 6, 256)    0           Stg6_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk2_Res2_conv (Conv2D)    (None, 6, 6, 256)    590080      Stg6_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg6_Blk2_Res2_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk2_add (Add)             (None, 6, 6, 256)    0           Stg6_Blk1_relu[0][0]             
                                                                 Stg6_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk2_relu (Activation)     (None, 6, 6, 256)    0           Stg6_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg6_Blk3_Res1_conv (Conv2D)    (None, 6, 6, 256)    590080      Stg6_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg6_Blk3_Res1_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk3_Res1_relu (Activation (None, 6, 6, 256)    0           Stg6_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk3_Res2_conv (Conv2D)    (None, 6, 6, 256)    590080      Stg6_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg6_Blk3_Res2_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk3_add (Add)             (None, 6, 6, 256)    0           Stg6_Blk2_relu[0][0]             
                                                                 Stg6_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk3_relu (Activation)     (None, 6, 6, 256)    0           Stg6_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout_5 (Dropout)             (None, 6, 6, 256)    0           Stg6_Blk3_relu[0][0]             
__________________________________________________________________________________________________
AvgPool (AveragePooling2D)      (None, 1, 1, 256)    0           dropout_5[0][0]                  
__________________________________________________________________________________________________
flatten (Flatten)               (None, 256)          0           AvgPool[0][0]                    
__________________________________________________________________________________________________
dense (Dense)                   (None, 2)            514         flatten[0][0]                    
==================================================================================================
Total params: 5,270,786
Trainable params: 5,263,266
Non-trainable params: 7,520
__________________________________________________________________________________________________
In [14]:
# Create checkpoint for the training
# This checkpoint performs model saving when
# an epoch gives highest testing accuracy
# filepath        = modelname + ".hdf5"
# checkpoint      = ModelCheckpoint(filepath, 
#                                   monitor='val_acc', 
#                                   verbose=0, 
#                                   save_best_only=True, 
#                                   mode='max')

#                             # Log the epoch detail into csv
# csv_logger      = CSVLogger(modelname +'.csv')
# callbacks_list  = [checkpoint,csv_logger]

def lrSchedule(epoch):
    """Piecewise-constant learning-rate schedule for LearningRateScheduler.

    Starts at 1e-3 and decays by fixed factors at epochs 150, 200, 240
    and 270 (thresholds are exclusive: the drop applies from epoch+1).
    Prints the rate each epoch so it shows up in the training log.
    """
    base = 1e-3

    # Pick the decay factor for the current epoch.
    if epoch > 270:          # was 190 in an earlier run
        factor = 0.5e-3
    elif epoch > 240:        # was 160
        factor = 1e-3
    elif epoch > 200:        # was 140
        factor = 1e-2
    elif epoch > 150:        # was 100
        factor = 1e-1
    else:
        factor = 1.0

    lr = base * factor
    print('Learning rate: ', lr)

    return lr

# Wrap the schedule function so Keras calls it at the start of each epoch.
LRScheduler     = LearningRateScheduler(lrSchedule)

                            # Create checkpoint for the training
                            # This checkpoint performs model saving when
                            # an epoch gives highest testing accuracy
filepath        = modelname + ".hdf5"
checkpoint      = ModelCheckpoint(filepath, 
                                  monitor='val_acc', 
                                  verbose=0, 
                                  save_best_only=True, 
                                  mode='max')
# NOTE(review): 'val_acc' matches the TF 1.x-style metric name seen in the
# training log below; TF 2.x logs it as 'val_accuracy' — confirm against
# the installed TensorFlow version, otherwise the checkpoint never saves.

                            # Log the epoch detail into csv
csv_logger      = CSVLogger(modelname +'.csv')
callbacks_list  = [checkpoint, csv_logger, LRScheduler]
#callbacks_list  = [checkpoint, csv_logger]
In [15]:
# Fit the model with on-the-fly data augmentation.
# Augmentation parameters: random shifts up to 25% of width/height,
# rotations up to 45 degrees, zoom, and horizontal flips (no vertical
# flips — flowers are rarely upside-down).
datagen = ImageDataGenerator(width_shift_range=0.25,
                             height_shift_range=0.25,
                             rotation_range=45,
                             zoom_range=0.8,
                             #zca_epsilon=1e-6,
                             #zca_whitening=True,
                             fill_mode='nearest',
                             horizontal_flip=True,
                             vertical_flip=False)

# steps_per_epoch must be an integer; len(trDat)/16 is a float in
# Python 3, so round up to cover the final partial batch.
# NOTE(review): fit_generator is deprecated in TF 2.1+ in favour of
# model.fit(...) — confirm against the installed TensorFlow version.
model.fit_generator(datagen.flow(trDat, trLbl, batch_size=16),
                    validation_data=(tsDat, tsLbl),
                    epochs=60, #300 
                    verbose=1,
                    steps_per_epoch=math.ceil(len(trDat) / 16),
                    callbacks=callbacks_list)
Learning rate:  0.001
Epoch 1/60
924/924 [==============================] - 161s 174ms/step - loss: 1.3539 - acc: 0.7308 - val_loss: 1.5369 - val_acc: 0.4056
Learning rate:  0.001
Epoch 2/60
924/924 [==============================] - 140s 151ms/step - loss: 0.6830 - acc: 0.7927 - val_loss: 0.7779 - val_acc: 0.6228
Learning rate:  0.001
Epoch 3/60
924/924 [==============================] - 140s 152ms/step - loss: 0.5578 - acc: 0.8091 - val_loss: 0.7525 - val_acc: 0.7132
Learning rate:  0.001
Epoch 4/60
924/924 [==============================] - 141s 153ms/step - loss: 0.4868 - acc: 0.8294 - val_loss: 0.4649 - val_acc: 0.8377
Learning rate:  0.001
Epoch 5/60
924/924 [==============================] - 142s 154ms/step - loss: 0.4579 - acc: 0.8356 - val_loss: 0.4239 - val_acc: 0.8601
Learning rate:  0.001
Epoch 6/60
924/924 [==============================] - 140s 151ms/step - loss: 0.4426 - acc: 0.8380 - val_loss: 0.4400 - val_acc: 0.8225
Learning rate:  0.001
Epoch 7/60
924/924 [==============================] - 140s 152ms/step - loss: 0.4271 - acc: 0.8401 - val_loss: 1.5996 - val_acc: 0.6166
Learning rate:  0.001
Epoch 8/60
924/924 [==============================] - 142s 153ms/step - loss: 0.4057 - acc: 0.8526 - val_loss: 0.3637 - val_acc: 0.8693
Learning rate:  0.001
Epoch 9/60
924/924 [==============================] - 141s 152ms/step - loss: 0.4046 - acc: 0.8540 - val_loss: 0.3758 - val_acc: 0.8948
Learning rate:  0.001
Epoch 10/60
924/924 [==============================] - 140s 151ms/step - loss: 0.3920 - acc: 0.8569 - val_loss: 0.3200 - val_acc: 0.8912
Learning rate:  0.001
Epoch 11/60
924/924 [==============================] - 143s 155ms/step - loss: 0.3928 - acc: 0.8563 - val_loss: 0.9008 - val_acc: 0.8065
Learning rate:  0.001
Epoch 12/60
924/924 [==============================] - 140s 151ms/step - loss: 0.3811 - acc: 0.8594 - val_loss: 0.3321 - val_acc: 0.8907
Learning rate:  0.001
Epoch 13/60
924/924 [==============================] - 142s 154ms/step - loss: 0.3805 - acc: 0.8606 - val_loss: 0.3393 - val_acc: 0.8912
Learning rate:  0.001
Epoch 14/60
924/924 [==============================] - 146s 158ms/step - loss: 0.3759 - acc: 0.8609 - val_loss: 0.5987 - val_acc: 0.8212
Learning rate:  0.001
Epoch 15/60
924/924 [==============================] - 141s 153ms/step - loss: 0.3710 - acc: 0.8615 - val_loss: 0.3706 - val_acc: 0.8872
Learning rate:  0.001
Epoch 16/60
924/924 [==============================] - 141s 153ms/step - loss: 0.3633 - acc: 0.8675 - val_loss: 0.2839 - val_acc: 0.9096
Learning rate:  0.001
Epoch 17/60
924/924 [==============================] - 142s 153ms/step - loss: 0.3640 - acc: 0.8697 - val_loss: 0.7352 - val_acc: 0.7646
Learning rate:  0.001
Epoch 18/60
924/924 [==============================] - 142s 154ms/step - loss: 0.3585 - acc: 0.8701 - val_loss: 0.3089 - val_acc: 0.8945
Learning rate:  0.001
Epoch 19/60
924/924 [==============================] - 143s 155ms/step - loss: 0.3593 - acc: 0.8726 - val_loss: 0.2910 - val_acc: 0.8999
Learning rate:  0.001
Epoch 20/60
924/924 [==============================] - 144s 156ms/step - loss: 0.3492 - acc: 0.8745 - val_loss: 0.6852 - val_acc: 0.7941
Learning rate:  0.001
Epoch 21/60
924/924 [==============================] - 141s 153ms/step - loss: 0.3484 - acc: 0.8756 - val_loss: 0.4296 - val_acc: 0.8406
Learning rate:  0.001
Epoch 22/60
924/924 [==============================] - 141s 152ms/step - loss: 0.3460 - acc: 0.8786 - val_loss: 0.3905 - val_acc: 0.8471
Learning rate:  0.001
Epoch 23/60
924/924 [==============================] - 144s 156ms/step - loss: 0.3483 - acc: 0.8780 - val_loss: 0.3685 - val_acc: 0.8761
Learning rate:  0.001
Epoch 24/60
924/924 [==============================] - 141s 153ms/step - loss: 0.3455 - acc: 0.8766 - val_loss: 0.3530 - val_acc: 0.9050
Learning rate:  0.001
Epoch 25/60
924/924 [==============================] - 141s 153ms/step - loss: 0.3374 - acc: 0.8817 - val_loss: 0.4623 - val_acc: 0.8260
Learning rate:  0.001
Epoch 26/60
924/924 [==============================] - 142s 153ms/step - loss: 0.3360 - acc: 0.8816 - val_loss: 0.2967 - val_acc: 0.9029
Learning rate:  0.001
Epoch 27/60
924/924 [==============================] - 141s 153ms/step - loss: 0.3335 - acc: 0.8847 - val_loss: 0.3202 - val_acc: 0.8956
Learning rate:  0.001
Epoch 28/60
924/924 [==============================] - 141s 153ms/step - loss: 0.3290 - acc: 0.8832 - val_loss: 0.2803 - val_acc: 0.9102
Learning rate:  0.001
Epoch 29/60
924/924 [==============================] - 3954s 4s/step - loss: 0.3345 - acc: 0.8839 - val_loss: 0.2765 - val_acc: 0.9118
Learning rate:  0.001
Epoch 30/60
924/924 [==============================] - 145s 157ms/step - loss: 0.3301 - acc: 0.8862 - val_loss: 0.2474 - val_acc: 0.9196
Learning rate:  0.001
Epoch 31/60
924/924 [==============================] - 142s 154ms/step - loss: 0.3292 - acc: 0.8852 - val_loss: 0.2982 - val_acc: 0.9034
Learning rate:  0.001
Epoch 32/60
924/924 [==============================] - 144s 156ms/step - loss: 0.3321 - acc: 0.8841 - val_loss: 0.3102 - val_acc: 0.8883
Learning rate:  0.001
Epoch 33/60
924/924 [==============================] - 141s 152ms/step - loss: 0.3248 - acc: 0.8883 - val_loss: 0.4302 - val_acc: 0.8341
Learning rate:  0.001
Epoch 34/60
924/924 [==============================] - 141s 153ms/step - loss: 0.3213 - acc: 0.8905 - val_loss: 0.3326 - val_acc: 0.9131
Learning rate:  0.001
Epoch 35/60
924/924 [==============================] - 143s 154ms/step - loss: 0.3194 - acc: 0.8901 - val_loss: 0.4162 - val_acc: 0.8588
Learning rate:  0.001
Epoch 36/60
924/924 [==============================] - 149s 161ms/step - loss: 0.3235 - acc: 0.8889 - val_loss: 0.2920 - val_acc: 0.9067
Learning rate:  0.001
Epoch 37/60
924/924 [==============================] - 151s 163ms/step - loss: 0.3177 - acc: 0.8889 - val_loss: 0.5402 - val_acc: 0.8228
Learning rate:  0.001
Epoch 38/60
924/924 [==============================] - 150s 163ms/step - loss: 0.3154 - acc: 0.8902 - val_loss: 0.2506 - val_acc: 0.9240
Learning rate:  0.001
Epoch 39/60
924/924 [==============================] - 148s 160ms/step - loss: 0.3150 - acc: 0.8907 - val_loss: 0.2970 - val_acc: 0.8972
Learning rate:  0.001
Epoch 40/60
924/924 [==============================] - 150s 162ms/step - loss: 0.3161 - acc: 0.8924 - val_loss: 0.2774 - val_acc: 0.9072
Learning rate:  0.001
Epoch 41/60
924/924 [==============================] - 150s 162ms/step - loss: 0.3150 - acc: 0.8897 - val_loss: 0.3108 - val_acc: 0.8950
Learning rate:  0.001
Epoch 42/60
924/924 [==============================] - 149s 162ms/step - loss: 0.3098 - acc: 0.8954 - val_loss: 0.3563 - val_acc: 0.8701
Learning rate:  0.001
Epoch 43/60
924/924 [==============================] - 149s 162ms/step - loss: 0.3078 - acc: 0.8937 - val_loss: 0.2684 - val_acc: 0.9104
Learning rate:  0.001
Epoch 44/60
924/924 [==============================] - 148s 161ms/step - loss: 0.3078 - acc: 0.8939 - val_loss: 0.2848 - val_acc: 0.9175
Learning rate:  0.001
Epoch 45/60
924/924 [==============================] - 148s 160ms/step - loss: 0.3091 - acc: 0.8936 - val_loss: 0.2478 - val_acc: 0.9218
Learning rate:  0.001
Epoch 46/60
924/924 [==============================] - 152s 164ms/step - loss: 0.3085 - acc: 0.8942 - val_loss: 0.2986 - val_acc: 0.8980
Learning rate:  0.001
Epoch 47/60
924/924 [==============================] - 151s 163ms/step - loss: 0.3064 - acc: 0.8948 - val_loss: 0.2692 - val_acc: 0.9218
Learning rate:  0.001
Epoch 48/60
924/924 [==============================] - 147s 159ms/step - loss: 0.3065 - acc: 0.8954 - val_loss: 0.2275 - val_acc: 0.9297
Learning rate:  0.001
Epoch 49/60
924/924 [==============================] - 148s 160ms/step - loss: 0.3042 - acc: 0.8958 - val_loss: 0.2498 - val_acc: 0.9253
Learning rate:  0.001
Epoch 50/60
924/924 [==============================] - 147s 159ms/step - loss: 0.3023 - acc: 0.8956 - val_loss: 0.5571 - val_acc: 0.8009
Learning rate:  0.001
Epoch 51/60
924/924 [==============================] - 144s 156ms/step - loss: 0.3040 - acc: 0.8970 - val_loss: 0.2674 - val_acc: 0.9118
Learning rate:  0.001
Epoch 52/60
924/924 [==============================] - 147s 159ms/step - loss: 0.3011 - acc: 0.8977 - val_loss: 0.3409 - val_acc: 0.8877
Learning rate:  0.001
Epoch 53/60
924/924 [==============================] - 153s 165ms/step - loss: 0.3008 - acc: 0.8995 - val_loss: 0.2818 - val_acc: 0.9180
Learning rate:  0.001
Epoch 54/60
924/924 [==============================] - 148s 160ms/step - loss: 0.2971 - acc: 0.9002 - val_loss: 0.2364 - val_acc: 0.9264
Learning rate:  0.001
Epoch 55/60
924/924 [==============================] - 151s 163ms/step - loss: 0.2964 - acc: 0.8995 - val_loss: 0.2857 - val_acc: 0.9021
Learning rate:  0.001
Epoch 56/60
924/924 [==============================] - 148s 161ms/step - loss: 0.2988 - acc: 0.8983 - val_loss: 0.2593 - val_acc: 0.9210
Learning rate:  0.001
Epoch 57/60
924/924 [==============================] - 147s 159ms/step - loss: 0.2913 - acc: 0.9023 - val_loss: 0.2385 - val_acc: 0.9259
Learning rate:  0.001
Epoch 58/60
924/924 [==============================] - 142s 154ms/step - loss: 0.2978 - acc: 0.8975 - val_loss: 0.2651 - val_acc: 0.9050
Learning rate:  0.001
Epoch 59/60
924/924 [==============================] - 143s 155ms/step - loss: 0.2937 - acc: 0.9001 - val_loss: 0.2688 - val_acc: 0.9186
Learning rate:  0.001
Epoch 60/60
924/924 [==============================] - 141s 152ms/step - loss: 0.2947 - acc: 0.9015 - val_loss: 0.4500 - val_acc: 0.8488
Out[15]:
<tensorflow.python.keras.callbacks.History at 0x2478b437470>
In [16]:
# Training is complete: load the best weights (saved by the
# ModelCheckpoint callback at the epoch with the highest val_acc)
# into a fresh model object, then compile it so we can run the
# final evaluation on the held-out test set.
modelGo.load_weights(filepath)
modelGo.compile(loss='categorical_crossentropy', 
                optimizer='adam', 
                metrics=['accuracy'])
In [17]:
# Make classification on the test dataset
predicts    = modelGo.predict(tsDat)

# Convert the per-class probability outputs (and the one-hot
# ground-truth labels) to integer class indices
# for the classification report
predout     = np.argmax(predicts,axis=1)
testout     = np.argmax(tsLbl,axis=1)
labelname   = ['non-flower', 'flower']
                                            # the labels for the classification report


testScores  = metrics.accuracy_score(testout,predout)
confusion   = metrics.confusion_matrix(testout,predout)


print("Best accuracy (on testing dataset): %.2f%%" % (testScores*100))
print(metrics.classification_report(testout,predout,target_names=labelname,digits=4))
print(confusion)
Best accuracy (on testing dataset): 92.97%
              precision    recall  f1-score   support

  non-flower     0.8891    0.9371    0.9125      1446
      flower     0.9581    0.9249    0.9412      2250

    accuracy                         0.9297      3696
   macro avg     0.9236    0.9310    0.9268      3696
weighted avg     0.9311    0.9297    0.9300      3696

[[1355   91]
 [ 169 2081]]
In [18]:
import pandas as pd

# Plot the loss and accuracy curves recorded by the CSVLogger callback
# (validation curve first, training curve second in each panel).
records = pd.read_csv(modelname + '.csv')

plt.figure()

# Top panel: loss
ax_loss = plt.subplot(211)
ax_loss.plot(records['val_loss'])
ax_loss.plot(records['loss'])
plt.yticks([0, 0.20, 0.30, 0.4, 0.5])
ax_loss.set_title('Loss value', fontsize=12)
ax_loss.set_xticklabels([])     # hide x labels; shared with the panel below

# Bottom panel: accuracy
ax_acc = plt.subplot(212)
ax_acc.plot(records['val_acc'])
ax_acc.plot(records['acc'])
plt.yticks([0.7, 0.8, 0.9, 1.0])
ax_acc.set_title('Accuracy', fontsize=12)
plt.show()
In [19]:
# Collect the index of every misclassified test sample
# (prediction differs from the ground-truth label).
wrong_ans_index = [i for i, (pred, truth) in enumerate(zip(predout, testout))
                   if pred != truth]
In [20]:
wrong_ans_index = list(set(wrong_ans_index))  # deduplicate; NOTE: set() does not preserve the original ascending order
In [ ]:
# Show every misclassified test image together with its predicted and
# actual class indices (0 = non-flower, 1 = flower, per labelname above).
# Swap tsDatOrg for another image set (flowers / fungus / rocks) to
# inspect those instead; to sample randomly, iterate over
# random.sample(wrong_ans_index, k) rather than the full list.

dataset = tsDatOrg

for index in wrong_ans_index:
    print("Showing %s index image" %(index))
    print("Predicted as %s but is actually %s" %(predout[index], testout[index]))
    # Bug fix: was `data[index]` — `data` is never defined in this notebook
    # and only worked via stale kernel state; use the `dataset` chosen above.
    imgplot = plt.imshow(dataset[index])
    plt.show()
Showing 2048 index image
Predicted as 0 but is actually 1
Showing 2051 index image
Predicted as 0 but is actually 1
Showing 4 index image
Predicted as 0 but is actually 1
Showing 3588 index image
Predicted as 1 but is actually 0
Showing 3079 index image
Predicted as 1 but is actually 0
Showing 3156 index image
Predicted as 1 but is actually 0
Showing 17 index image
Predicted as 0 but is actually 1
Showing 1022 index image
Predicted as 0 but is actually 1
Showing 2583 index image
Predicted as 1 but is actually 0
Showing 536 index image
Predicted as 0 but is actually 1
Showing 1049 index image
Predicted as 0 but is actually 1
Showing 3668 index image
Predicted as 1 but is actually 0
Showing 2588 index image
Predicted as 1 but is actually 0
Showing 32 index image
Predicted as 0 but is actually 1
Showing 33 index image
Predicted as 0 but is actually 1
Showing 1572 index image
Predicted as 0 but is actually 1
Showing 37 index image
Predicted as 0 but is actually 1
Showing 2085 index image
Predicted as 0 but is actually 1
Showing 1065 index image
Predicted as 0 but is actually 1
Showing 3625 index image
Predicted as 1 but is actually 0
Showing 1579 index image
Predicted as 0 but is actually 1
Showing 2093 index image
Predicted as 0 but is actually 1
Showing 2094 index image
Predicted as 0 but is actually 1
Showing 3631 index image
Predicted as 1 but is actually 0
Showing 2610 index image
Predicted as 1 but is actually 0
Showing 564 index image
Predicted as 0 but is actually 1
Showing 1079 index image
Predicted as 0 but is actually 1
Showing 571 index image
Predicted as 0 but is actually 1
Showing 61 index image
Predicted as 0 but is actually 1
Showing 2111 index image
Predicted as 0 but is actually 1
Showing 3136 index image
Predicted as 1 but is actually 0
Showing 3137 index image
Predicted as 1 but is actually 0
Showing 2117 index image
Predicted as 0 but is actually 1
Showing 70 index image
Predicted as 0 but is actually 1
Showing 2630 index image
Predicted as 1 but is actually 0
Showing 2632 index image
Predicted as 1 but is actually 0
Showing 3141 index image
Predicted as 1 but is actually 0
Showing 3656 index image
Predicted as 1 but is actually 0
Showing 75 index image
Predicted as 0 but is actually 1
Showing 2127 index image
Predicted as 0 but is actually 1
Showing 1616 index image
Predicted as 0 but is actually 1
Showing 3152 index image
Predicted as 1 but is actually 0
Showing 1618 index image
Predicted as 0 but is actually 1
Showing 1619 index image
Predicted as 0 but is actually 1
Showing 2132 index image
Predicted as 0 but is actually 1
Showing 597 index image
Predicted as 0 but is actually 1
Showing 1109 index image
Predicted as 0 but is actually 1
Showing 2134 index image
Predicted as 0 but is actually 1
Showing 88 index image
Predicted as 0 but is actually 1
Showing 89 index image
Predicted as 0 but is actually 1
Showing 601 index image
Predicted as 0 but is actually 1
Showing 2135 index image
Predicted as 0 but is actually 1
Showing 3155 index image
Predicted as 1 but is actually 0
Showing 1117 index image
Predicted as 0 but is actually 1
Showing 2654 index image
Predicted as 1 but is actually 0
Showing 1632 index image
Predicted as 0 but is actually 1
Showing 1121 index image
Predicted as 0 but is actually 1
Showing 1122 index image
Predicted as 0 but is actually 1
Showing 2144 index image
Predicted as 0 but is actually 1
Showing 2658 index image
Predicted as 1 but is actually 0
Showing 1642 index image
Predicted as 0 but is actually 1
Showing 3180 index image
Predicted as 1 but is actually 0
Showing 621 index image
Predicted as 0 but is actually 1
Showing 3695 index image
Predicted as 1 but is actually 0
Showing 2164 index image
Predicted as 0 but is actually 1
Showing 631 index image
Predicted as 0 but is actually 1
Showing 3192 index image
Predicted as 1 but is actually 0
Showing 636 index image
Predicted as 0 but is actually 1
Showing 2172 index image
Predicted as 0 but is actually 1
Showing 1153 index image
Predicted as 0 but is actually 1
Showing 642 index image
Predicted as 0 but is actually 1
Showing 2691 index image
Predicted as 1 but is actually 0
Showing 3203 index image
Predicted as 1 but is actually 0
Showing 1669 index image
Predicted as 0 but is actually 1
Showing 646 index image
Predicted as 0 but is actually 1
Showing 647 index image
Predicted as 0 but is actually 1
Showing 2181 index image
Predicted as 0 but is actually 1
Showing 3207 index image
Predicted as 1 but is actually 0
Showing 3208 index image
Predicted as 1 but is actually 0
Showing 2189 index image
Predicted as 0 but is actually 1
Showing 1678 index image
Predicted as 0 but is actually 1
Showing 146 index image
Predicted as 0 but is actually 1
Showing 147 index image
Predicted as 0 but is actually 1
Showing 2195 index image
Predicted as 0 but is actually 1
Showing 2709 index image
Predicted as 1 but is actually 0
Showing 662 index image
Predicted as 0 but is actually 1
Showing 1175 index image
Predicted as 0 but is actually 1
Showing 1688 index image
Predicted as 0 but is actually 1
Showing 2198 index image
Predicted as 0 but is actually 1
Showing 2199 index image
Predicted as 0 but is actually 1
Showing 2713 index image
Predicted as 1 but is actually 0
Showing 3224 index image
Predicted as 1 but is actually 0
Showing 1183 index image
Predicted as 0 but is actually 1
Showing 677 index image
Predicted as 0 but is actually 1
Showing 1190 index image
Predicted as 0 but is actually 1
Showing 1704 index image
Predicted as 0 but is actually 1
Showing 2216 index image
Predicted as 0 but is actually 1
Showing 1706 index image
Predicted as 0 but is actually 1
Showing 171 index image
Predicted as 0 but is actually 1
Showing 3244 index image
Predicted as 1 but is actually 0
Showing 2221 index image
Predicted as 0 but is actually 1
Showing 686 index image
Predicted as 0 but is actually 1
Showing 2227 index image
Predicted as 0 but is actually 1
Showing 694 index image
Predicted as 0 but is actually 1
Showing 2743 index image
Predicted as 1 but is actually 0
Showing 2744 index image
Predicted as 1 but is actually 0
Showing 2749 index image
Predicted as 1 but is actually 0
In [19]:
# Stacking 3 NNs?